let af = AssociatedFile (Just f)
let downloader p' tmpfile = do
_ <- Remote.retrieveExportWithContentIdentifier
- ia loc cid (fromRawFilePath tmpfile)
+ ia loc [cid] (fromRawFilePath tmpfile)
(Left k)
(combineMeterUpdate p' p)
ok <- moveAnnex k af tmpfile
doimportsmall cidmap db loc cid sz p = do
let downloader tmpfile = do
(k, _) <- Remote.retrieveExportWithContentIdentifier
- ia loc cid (fromRawFilePath tmpfile)
+ ia loc [cid] (fromRawFilePath tmpfile)
(Right (mkkey tmpfile))
p
case keyGitSha k of
let af = AssociatedFile (Just f)
let downloader tmpfile p = do
(k, _) <- Remote.retrieveExportWithContentIdentifier
- ia loc cid (fromRawFilePath tmpfile)
+ ia loc [cid] (fromRawFilePath tmpfile)
(Right (mkkey tmpfile))
p
case keyGitSha k of
-- connection is reasonably fast, it's probably as good as
-- git's handling of similar situations with files being modified while
-- it's updating the working tree for a merge.
-retrieveExportWithContentIdentifierM :: AndroidSerial -> AndroidPath -> ExportLocation -> ContentIdentifier -> FilePath -> Either Key (Annex Key) -> MeterUpdate -> Annex (Key, Verification)
-retrieveExportWithContentIdentifierM serial adir loc cid dest gk _p = do
+retrieveExportWithContentIdentifierM :: AndroidSerial -> AndroidPath -> ExportLocation -> [ContentIdentifier] -> FilePath -> Either Key (Annex Key) -> MeterUpdate -> Annex (Key, Verification)
+retrieveExportWithContentIdentifierM serial adir loc cids dest gk _p = do
case gk of
Right mkkey -> do
go
where
go = do
retrieve' serial src dest
- currcid <- getExportContentIdentifier serial adir loc
- when (currcid /= Right (Just cid)) $
- giveup "the file on the android device has changed"
+ getExportContentIdentifier serial adir loc >>= \case
+ Right (Just currcid)
+ | any (currcid ==) cids -> return ()
+ _ -> giveup "the file on the android device has changed"
src = androidExportLocation adir loc
storeExportWithContentIdentifierM :: AndroidSerial -> AndroidPath -> FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex ContentIdentifier
, giveup $ "Unable to access borg repository " ++ locBorgRepo borgrepo
)
-retrieveExportWithContentIdentifierM :: BorgRepo -> ImportLocation -> ContentIdentifier -> FilePath -> Either Key (Annex Key) -> MeterUpdate -> Annex (Key, Verification)
+retrieveExportWithContentIdentifierM :: BorgRepo -> ImportLocation -> [ContentIdentifier] -> FilePath -> Either Key (Annex Key) -> MeterUpdate -> Annex (Key, Verification)
retrieveExportWithContentIdentifierM borgrepo loc _ dest gk _ = do
showOutput
case gk of
-- versions of git-annex ignored inodes by default, treat two content
-- identifiers as the same if they differ only by one having the inode
-- ignored.
-guardSameContentIdentifiers :: a -> ContentIdentifier -> Maybe ContentIdentifier -> a
+guardSameContentIdentifiers :: a -> [ContentIdentifier] -> Maybe ContentIdentifier -> a
guardSameContentIdentifiers _ _ Nothing = giveup "file not found"
-guardSameContentIdentifiers cont old (Just new)
- | new == old = cont
- | ignoreinode new == old = cont
- | new == ignoreinode old = cont
+guardSameContentIdentifiers cont olds (Just new)
+ | any (new ==) olds = cont
+ | any (ignoreinode new ==) olds = cont
+ | any (\old -> new == ignoreinode old) olds = cont
| otherwise = giveup "file content has changed"
where
ignoreinode cid@(ContentIdentifier b) =
{ keySize = keySize kd <|> Just sz }
currcid <- liftIO $ mkContentIdentifier ii absf
=<< R.getSymbolicLinkStatus absf
- guardSameContentIdentifiers (return (Just k)) cid currcid
+ guardSameContentIdentifiers (return (Just k)) [cid] currcid
where
f = fromExportLocation loc
absf = dir P.</> f
, inodeCache = Nothing
}
-retrieveExportWithContentIdentifierM :: IgnoreInodes -> RawFilePath -> CopyCoWTried -> ExportLocation -> ContentIdentifier -> FilePath -> Either Key (Annex Key) -> MeterUpdate -> Annex (Key, Verification)
-retrieveExportWithContentIdentifierM ii dir cow loc cid dest gk p =
+retrieveExportWithContentIdentifierM :: IgnoreInodes -> RawFilePath -> CopyCoWTried -> ExportLocation -> [ContentIdentifier] -> FilePath -> Either Key (Annex Key) -> MeterUpdate -> Annex (Key, Verification)
+retrieveExportWithContentIdentifierM ii dir cow loc cids dest gk p =
case gk of
Right mkkey -> do
go Nothing
-- Check before copy, to avoid expensive copy of wrong file
-- content.
- precheck cont = guardSameContentIdentifiers cont cid
+ precheck cont = guardSameContentIdentifiers cont cids
=<< liftIO . mkContentIdentifier ii f
=<< liftIO (R.getSymbolicLinkStatus f)
#else
=<< R.getSymbolicLinkStatus f
#endif
- guardSameContentIdentifiers cont cid currcid
+ guardSameContentIdentifiers cont cids currcid
-- When copy-on-write was done, cannot check the handle that was
-- copied from, but such a copy should run very fast, so
postcheckcow cont = do
currcid <- liftIO $ mkContentIdentifier ii f
=<< R.getSymbolicLinkStatus f
- guardSameContentIdentifiers cont cid currcid
+ guardSameContentIdentifiers cont cids currcid
storeExportWithContentIdentifierM :: IgnoreInodes -> RawFilePath -> CopyCoWTried -> FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex ContentIdentifier
storeExportWithContentIdentifierM ii dir cow src _k loc overwritablecids p = do
, giveup $ "exported content cannot be verified due to using the " ++ decodeBS (formatKeyVariety (fromKey keyVariety k)) ++ " backend"
)
- retrieveKeyFileFromImport dbv ciddbv k af dest p =
- getkeycids ciddbv k >>= \case
- (cid:_) -> do
+ retrieveKeyFileFromImport dbv ciddbv k af dest p = do
+ cids <- getkeycids ciddbv k
+ if not (null cids)
+ then do
l <- getfirstexportloc dbv k
- snd <$> retrieveExportWithContentIdentifier (importActions r) l cid dest (Left k) p
+ snd <$> retrieveExportWithContentIdentifier (importActions r) l cids dest (Left k) p
-- In case a content identifier is somehow missing,
-- try this instead.
- [] -> if isexport
+ else if isexport
then retrieveKeyFileFromExport dbv k af dest p
else giveup "no content identifier is recorded, unable to retrieve"
| otherwise =
i : removemostrecent mtime rest
-retrieveExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> ExportLocation -> ContentIdentifier -> FilePath -> Either Key (Annex Key) -> MeterUpdate -> Annex (Key, Verification)
-retrieveExportWithContentIdentifierS3 hv r rs info loc cid dest gk p =
+retrieveExportWithContentIdentifierS3 :: S3HandleVar -> Remote -> RemoteStateHandle -> S3Info -> ExportLocation -> [ContentIdentifier] -> FilePath -> Either Key (Annex Key) -> MeterUpdate -> Annex (Key, Verification)
+retrieveExportWithContentIdentifierS3 hv r rs info loc (cid:_) dest gk p =
case gk of
Right _mkkey -> do
k <- go Nothing
return k
Nothing -> giveup $ needS3Creds (uuid r)
o = T.pack $ bucketExportLocation info loc
+retrieveExportWithContentIdentifierS3 _ _ _ _ _ [] _ _ _ = giveup "missing content identifier"
{- Catch exception getObject returns when a precondition is not met,
- and replace with a more understandable message for the user. -}
-- Throws exception on failure to access the remote.
, importKey :: Maybe (ImportLocation -> ContentIdentifier -> ByteSize -> MeterUpdate -> a (Maybe Key))
-- Retrieves a file from the remote. Ensures that the file
- -- it retrieves has the requested ContentIdentifier.
+ -- it retrieves has one of the requested ContentIdentifiers.
--
-- This has to be used rather than retrieveExport
-- when a special remote supports imports, since files on such a
-- Throws exception on failure.
, retrieveExportWithContentIdentifier
:: ExportLocation
- -> ContentIdentifier
+ -> [ContentIdentifier]
-- file to write content to
-> FilePath
-- Either the key, or when it's not yet known, a callback